function [b,file]=Test(Smooth,Num)
% Test runs the multi-label KNN experiment on each Yahoo web-page dataset
% and collects five evaluation metrics per dataset into the matrix b.
%   Smooth -- smoothing parameter passed through to testML1
%   Num    -- number of nearest neighbours passed through to testML1
file={'Arts';'Business';'Computers';'Education';'Entertainment';'Health';'Recreation';'Reference';'Science';'Social';'Society'};
houzuiD='_data.mat';   % suffix of the training-data files
houzuiT='_test.mat';   % suffix of the test-data files
[numF,~]=size(file);
b=zeros(numF,5);       % one row of metrics per dataset
for i=1:numF
    train=strcat(file{i,1},houzuiD);
    test=strcat(file{i,1},houzuiT);
    [HammingLoss,RankingLoss,OneError,Coverage,Average_Precision,Outputs,Pre_Labels]=testML1(train,test,Num,Smooth);
    b(i,1)=HammingLoss;
    b(i,2)=RankingLoss;
    b(i,3)=OneError;
    b(i,4)=Coverage;
    b(i,5)=Average_Precision;
end
end
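For context, here is a minimal usage sketch of the harness above. The argument values are placeholders rather than tuned settings, and it assumes testML1 and the *_data.mat / *_test.mat files for every dataset are on the MATLAB path:

% Hypothetical call: Smooth=1 and Num=10 are placeholder values, not tuned settings
[b,file]=Test(1,10);
% b has one row per dataset with the columns
% HammingLoss, RankingLoss, OneError, Coverage, Average_Precision
for i=1:size(b,1)
    fprintf('%-15s %.4f %.4f %.4f %.4f %.4f\n',file{i,1},b(i,:));
end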
The above is the test code for the experiment platform, tedious in the extreme.
Below is the complete version of the KNN method.
First, KNNB.
function [Neighbors]=KNNB(train_data,Num)
% precL is a [numTest] matrix holding confidence scores
% binP is a binary partition (hard to put into words)
% train_data is a [numTrain,numA] matrix; likewise train_target and test_data
% special case: train_target is a numLabel*1 vector
[num_training,num_class]=size(train_data);

% Computing distance between training instances
% realmax on the diagonal so an instance is never picked as its own neighbour
dist_matrix=diag(realmax*ones(1,num_training));
for i=1:num_training-1
    if(mod(i,100)==0)
        disp(strcat('computing distance for instance:',num2str(i)));
    end
    vector1=train_data(i,:);
    for j=i+1:num_training
        vector2=train_data(j,:);
        dist_matrix(i,j)=sqrt(sum((vector1-vector2).^2));
        dist_matrix(j,i)=dist_matrix(i,j);
    end
end

% For every training instance keep the indices of its Num nearest neighbours
for i=1:num_training
    [temp,index]=sort(dist_matrix(i,:));
    Neighbors{i,1}=index(1:Num);
end
end
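A quick, hedged sanity check of KNNB on synthetic data; the matrix size and neighbour count below are arbitrary and only meant to illustrate the output shape:

% Toy data: 50 training instances with 8 attributes; ask for 5 nearest neighbours
train_data=rand(50,8);
Neighbors=KNNB(train_data,5);
% Neighbors is a 50x1 cell array; Neighbors{i,1} holds the indices of the
% 5 training instances closest to instance i under Euclidean distance
disp(Neighbors{1,1});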